library(MASS)
library(Rcpp)
library(imager)
## Loading required package: plyr
## Loading required package: magrittr
## 
## Attaching package: 'imager'
## The following object is masked from 'package:magrittr':
## 
##     add
## The following object is masked from 'package:plyr':
## 
##     liply
## The following objects are masked from 'package:stats':
## 
##     convolve, spectrum
## The following object is masked from 'package:graphics':
## 
##     frame
## The following object is masked from 'package:base':
## 
##     save.image
library(dplyr)
## 
## Attaching package: 'dplyr'
## The following objects are masked from 'package:plyr':
## 
##     arrange, count, desc, failwith, id, mutate, rename, summarise,
##     summarize
## The following object is masked from 'package:MASS':
## 
##     select
## The following objects are masked from 'package:stats':
## 
##     filter, lag
## The following objects are masked from 'package:base':
## 
##     intersect, setdiff, setequal, union
library(animation)
library(PET)
## Loading required package: adimpro
## Loading required package: awsMethods
## 
## Use the function setCores() to change the number of CPU cores.
## 
## Attaching package: 'awsMethods'
## The following object is masked from 'package:magrittr':
## 
##     extract
## Reading RAW images requires to install dcraw, see 
## 
##     http://cybercom.net/~dcoffin/dcraw/ for LINUX and http://www.insflug.org/raw/ 
##     for MAC OS and Windows
## 
## Attaching package: 'adimpro'
## The following object is masked from 'package:dplyr':
## 
##     combine
## 
## Attaching package: 'PET'
## The following object is masked from 'package:base':
## 
##     norm
library(raster)
## Loading required package: sp
## 
## Attaching package: 'raster'
## The following object is masked from 'package:PET':
## 
##     rotate
## The following object is masked from 'package:awsMethods':
## 
##     extract
## The following object is masked from 'package:dplyr':
## 
##     select
## The following object is masked from 'package:magrittr':
## 
##     extract
## The following objects are masked from 'package:MASS':
## 
##     area, select
library(scales)
library(MASS)
library(pracma)  # cot
## 
## Attaching package: 'pracma'
## The following objects are masked from 'package:magrittr':
## 
##     and, mod, or
library("h5")
library(dlm)
library(RcppArmadillo)
library(adimpro)
library(randomForest)
## randomForest 4.6-12
## Type rfNews() to see new features/changes/bug fixes.
## 
## Attaching package: 'randomForest'
## The following object is masked from 'package:adimpro':
## 
##     combine
## The following object is masked from 'package:dplyr':
## 
##     combine
source(file.path("code", "draw_path_on.R"))
# draw_path_on(): overlays a driving path on the current camera-frame plot
# (called below with speed_ms / angle_steers / color)

source(file.path("code", "hough.R"))
# Hough-transform helpers used below: hough(), getLineHough_plot(),
# getLineHough_plotline_blue(), get.vanishing.point(), get.vanishing.point.plot()

# 1. Exploratory analysis ----

# video, speed, angle, timeline are recorded

# dataset was downloaded in advance using code get_data.sh shared by comma.ai on Sept 19, 2016

# h5file() replaces the deprecated H5File(); the h5 package itself warns
# "This function is deprecated, use h5file instead".
log <- h5file(file.path("comma-dataset", "log", "2016-06-08--11-46-01.h5"))
image <- h5file(file.path("comma-dataset", "camera", "2016-06-08--11-46-01.h5"))

log_names <- list.datasets(log, recursive = TRUE)
image_names <- list.datasets(image, recursive = TRUE)

# Read one full 1-d dataset from the log file by its position in log_names
# (replaces the repeated temp <- ...@dim / [1:temp] / rm(temp) pattern).
read_log_dataset <- function(idx) {
  n <- log[log_names[idx]]@dim
  log[log_names[idx]][1:n]
}

speed <- read_log_dataset(28)           # dataset 28: vehicle speed -- TODO confirm via list.datasets(log)
angle <- read_log_dataset(35) / 10      # dataset 35: steering angle, stored x10 -- TODO confirm scale
timeline_image <- read_log_dataset(13)  # dataset 13: log-row -> camera-frame timeline -- TODO confirm

# Quick look at the distributions of the two recorded signals.
hist(speed)
hist(angle)

# Frame-index windows for the clipped train and test sets; both are
# daytime highway driving records.
index_train <- seq(11000, 13000)
index_test <- seq(13001, 14400)

# 2. Dynamic lane markings detection ----

i <- 595
k <- index_train[i]
# Frame k from the camera HDF5; aperm() reorders the stored axes into the
# (x, y, depth, colour) layout as.cimg() expects -- TODO confirm axis order
image_use <- (as.cimg(aperm(image[image_names][k,,,], c(4,3,1,2))))
# the original image 1
# [,160:1,1,] flips the frame vertically and keeps colour channel 1;
# "Robertcross" selects adimpro's Roberts-cross edge filter.
result <- edges( make.image(image_use[,160:1,1,]), type = "Robertcross")
result <- rgb2grey(result)
# the detected edges in greyscale
plot(image_use)

# plot the original image 1
show.image(result)

# plot the edges

# Binarise: keep only the strongest 3% of edge responses, then blank
# rows 135:160 of the matrix (presumably the sky band after the vertical
# flip -- TODO confirm).
temp.M <- extract.image(result)
temp.MM <- (temp.M > quantile(temp.M, 0.97))*1
temp.MM[,135:160] <- 0
show.image(make.image(temp.MM))

# use 97% quantile threshold to extract lane markings

# Robustness check: re-run the same edge pipeline on a nighttime recording.
# h5file() replaces the deprecated H5File() (the h5 package warns to use it).
log_2 <- h5file(file.path("comma-dataset", "log", "2016-02-11--21-32-47.h5"))
image_2 <- h5file(file.path("comma-dataset", "camera", "2016-02-11--21-32-47.h5"))
log_names_2 <- list.datasets(log_2, recursive = TRUE)
image_names_2 <- list.datasets(image_2, recursive = TRUE)

# Frame 3000, processed exactly as the daytime image above:
# vertical flip, channel 1, Roberts-cross edges, greyscale.
k2 <- 3000
image_use_2 <- as.cimg(aperm(image_2[image_names_2][k2,,,], c(4,3,1,2)))
result_2 <- rgb2grey(edges(make.image(image_use_2[,160:1,1,]), type = "Robertcross"))
plot(image_use_2)

show.image(result_2)

# Keep the strongest 3% of edge responses and blank rows 135:160
# (presumably the sky band after the flip -- TODO confirm).
temp.M_2 <- extract.image(result_2)
temp.MM_2 <- (temp.M_2 > quantile(temp.M_2, 0.97)) * 1
temp.MM_2[, 135:160] <- 0
show.image(make.image(temp.MM_2))

# result on image 2: nighttime


# Figure 1: 2 x 3 panel comparing the pipeline on the day vs night frame
# (original image / Roberts-cross edges / thresholded lane markings).
par(mfrow = c(2,3))

# rescale() maps intensities onto the 16-bit range make.image() expects.
# NOTE(review): the daytime call omits `from` (rescales from the data range)
# while the nighttime call fixes from = c(0, 255) -- presumably deliberate;
# TODO confirm.
show.image(make.image(rescale(image_use[,160:1,1,], to = c(0, 65535))))
title("daytime image")

show.image(result)
title("Roberts cross filter")

show.image(make.image(temp.MM))
title("detected lane markings")

show.image(make.image(rescale(image_use_2[,160:1,1,], to = c(0, 65535), from = c(0, 255))))
title("nighttime image")

show.image(result_2)
title("Roberts cross filter")

show.image(make.image(temp.MM_2))
title("detected lane markings", sub = "with 97% quantile threshold")

# restore the single-panel layout for the plots that follow
par(mfrow = c(1,1))

# code for figure 1

# 3. Vanishing points prediction ----

# Re-threshold the daytime edge map at the 97% quantile (no row masking
# this time -- the row filter below serves the same purpose).
temp.M <- extract.image(result)
temp.MM <- 1 * (temp.M > quantile(temp.M, 0.97))
show.image(make.image(temp.MM))

# Pixel coordinates of the detected markings, restricted to rows <= 135
# and split at the image midline (column 160) into left/right candidates.
temp <- which(temp.MM == 1, arr.ind = TRUE)
keep <- temp[, 2] <= 135
temp1 <- temp[keep & temp[, 1] <= 160, ]  # lane markings on the left
temp2 <- temp[keep & temp[, 1] > 160, ]   # lane markings on the right

# Hough accumulator of the left-lane pixels: rebuild a 320 x 320 binary
# image and let the sourced helper run and plot the transform.
P <- matrix(0, 320, 320)
P[temp1] <- 1
getLineHough_plot(P)
## Using Hough1 --> complete

# the Hough transformation for the left part
P <- matrix(0, 320, 320)
P[temp2] <- 1
getLineHough_plot(P)
## Using Hough1 --> complete

# the Hough transformation for the right part

# Figure 2: binary lane-marking image with the fitted Hough lines (blue)
# and the predicted vanishing point (red) overlaid.
P <- matrix(0, 320, 320)
P[temp] <- 1
p <- P
length.row <- nrow(p)
abc=hough(p)
## Using Hough1 --> complete
houghParam<-unlist(abc$Header)
image(p,main="Hough lines (blue) and vanishing point (red)", col = gray(c(0, 1)))
# Fit the dominant Hough line of the left and right pixel sets separately
# and draw each in blue (helpers from code/hough.R).
P1 <- matrix(0, 320, 320)
P1[temp1] <- 1
getLineHough_plotline_blue(P1)
## Using Hough1 --> complete
P2 <- matrix(0, 320, 320)
P2[temp2] <- 1
getLineHough_plotline_blue(P2)
## Using Hough1 --> complete
# plot the Hough lines from both parts

# Vanishing point for frame k plus the candidate line intersections (AAA).
# The cbind()/offset arithmetic flips and shifts the helper's coordinates
# to match image()'s axis conventions -- TODO confirm against hough.R.
point <- get.vanishing.point(k)
## Using Hough1 --> complete 
## Using Hough1 --> complete
AAA <- get.vanishing.point.plot(k)
## Using Hough1 --> complete 
## Using Hough1 --> complete
AAAA <- cbind(1-AAA[,1], AAA[,2] - 0.5)
points(AAAA[!is.na(AAAA[,1]),1], AAAA[!is.na(AAAA[,1]),2], pch = 20, col = "yellow", cex = 0.2)
# plot the eligible intersections
points(point[1]+0.5, point[2], pch = 19, col = "red")

# plot the prediction of vanishing point

# code for figure 2

# 4. Improvements of robustness & prediction of steering angle with random forest ----

# code for getting train set and testing set, data and prediction
# the result has been saved and will be loaded in the next block

# Training design matrix, one row per frame:
#   col 1: recorded steering angle, col 2: speed,
#   cols 3-4: dlm-filtered vanishing-point coordinates.
set_train <- matrix(0, length(index_train), 4)
for (i in seq_along(index_train)) {
    # First log row whose timeline entry matches this camera frame.
    frame_rows <- which(timeline_image == index_train[i])
    set_train[i, 1] <- angle[min(frame_rows)]
    set_train[i, 2] <- speed[min(frame_rows)]
}

record <- NULL
for (i in seq_along(index_train)) {
    k <- index_train[i]
    point <- get.vanishing.point(k)
    record <- rbind(record, point)
    # Online smoothing: refilter the non-NA history with a polynomial dlm
    # and take the most recent filtered state as the smoothed point.
    ts1 <- ts(record[!is.na(record[, 1]), 1])
    ts2 <- ts(record[!is.na(record[, 1]), 2])
    ind <- length(ts1)
    if (ind > 1) {
        result <- dlmFilter(ts1, dlmModPoly())
        result2 <- dlmFilter(ts2, dlmModPoly())
        # result$m has ind + 1 rows (prior state first); row ind + 1 is newest.
        set_train[i, 3:4] <- c(result$m[ind + 1, 1], result2$m[ind + 1, 1])
    } else {
        # Not enough history to filter: fall back to the current raw point.
        # (The original assigned the whole `record` matrix here, which is
        # wrong once record has more than one row.)
        set_train[i, 3:4] <- record[i, ]
    }
}

# Test design matrix, built exactly like set_train.
# BUG FIX: the original looped over 1:length(index_train) (2001 frames)
# while set_test / index_test only have length(index_test) = 1400 entries,
# indexing past the end of both; loop over seq_along(index_test) instead.
set_test <- matrix(0, length(index_test), 4)
for (i in seq_along(index_test)) {
    frame_rows <- which(timeline_image == index_test[i])
    set_test[i, 1] <- angle[min(frame_rows)]
    set_test[i, 2] <- speed[min(frame_rows)]
}

record <- NULL
for (i in seq_along(index_test)) {
    k <- index_test[i]
    point <- get.vanishing.point(k)
    record <- rbind(record, point)
    # Same online dlm smoothing of the vanishing point as for the train set.
    ts1 <- ts(record[!is.na(record[, 1]), 1])
    ts2 <- ts(record[!is.na(record[, 1]), 2])
    ind <- length(ts1)
    if (ind > 1) {
        result <- dlmFilter(ts1, dlmModPoly())
        result2 <- dlmFilter(ts2, dlmModPoly())
        set_test[i, 3:4] <- c(result$m[ind + 1, 1], result2$m[ind + 1, 1])
    } else {
        # Fall back to the current raw point (the original assigned the
        # whole `record` matrix, which is wrong for i > 1).
        set_test[i, 3:4] <- record[i, ]
    }
}
# The loops above are slow; precomputed results were saved to temp/ and
# are reloaded here, overwriting the in-session objects.
set_train <- readRDS(file.path("temp", "set_train.rds"))
set_test <- readRDS(file.path("temp", "set_test.rds"))
# model: fitted randomForest -- presumably trained on set_train[, 2:4]
# (speed + vanishing point) to predict column 1; TODO confirm.
model <- readRDS(file.path("temp", "model.rds"))
predicted.angle <- predict(model, set_test[,2:4])
predicted.angle.train <- predict(model, set_train[,2:4])

# Results ----

mean((predict(model, set_train[,2:4]) - set_train[,1])^2)  # in-sample MSE (original comment said "validation", but this is the training set -- TODO confirm intent)
## [1] 25.5074
mean((predict(model, set_test[,2:4]) - set_test[,1])^2)  #test MSE
## [1] 4.459076
# Baseline location/spread of the target angle, for context.
mean(angle[index_train])
## [1] -0.04571349
sd(angle[index_train])
## [1] 5.607324
mean(angle[index_test])
## [1] -0.2385519
sd(angle[index_test])
## [1] 2.974674
# RMSEs of the two MSEs above, comparable to the sd of the angle.
sqrt(25.51)
## [1] 5.050743
sqrt(4.46)
## [1] 2.111871
# Variable importance over the three predictors fed to predict()
# (presumably speed and the two vanishing-point coordinates -- TODO confirm).
importance(model)
##   IncNodePurity
## 1          0.00
## 2      21486.95
## 3       5199.43
# code for the results

# Figure 3: one training frame with the recorded (blue) and predicted
# (green) steering paths and the filtered vanishing point (red).
i <- 500
k <- index_train[i]
plot(as.cimg(aperm(image[image_names][k,,,], c(4,3,1,2))))
draw_path_on(speed_ms = set_train[i, 2], angle_steers = set_train[i,1], color = "blue")
draw_path_on(speed_ms = set_train[i, 2], angle_steers = predicted.angle.train[i], color = "green")
# Map the stored vanishing-point coordinates back to 320 x 160 pixel space
# (x shifted and scaled by 320; y additionally flipped about row 160).
points((set_train[i,3] + 0.5) * 320, 160 - ((set_train[i,4] + 0.5) * 320 - 160), col = "red", pch=19, cex=1.5)

# code for Figure 3
# code for visualization; 
# results have been uploaded to youtube

# Render the test-set visualization frame by frame into test.mp4
# (blue = recorded angle, green = predicted, red = vanishing point).
saveVideo(for (i in 1:length(index_test)) {
    k <- index_test[i]
    plot(as.cimg(aperm(image[image_names][k,,,], c(4,3,1,2))))
    draw_path_on(speed_ms = set_test[i, 2], angle_steers = set_test[i,1], color = "blue")
    draw_path_on(speed_ms = set_test[i, 2], angle_steers = predicted.angle[i], color = "green")
    points((set_test[i,3] + 0.5) * 320, 160 - ((set_test[i,4] + 0.5) * 320 - 160), col = "red", pch=19, cex=1.5)
}, "test.mp4", interval = 0.05)

# Same rendering for the training window, saved as train.mp4.
saveVideo(for (i in 1:length(index_train)) {
    k <- index_train[i]
    plot(as.cimg(aperm(image[image_names][k,,,], c(4,3,1,2))))
    draw_path_on(speed_ms = set_train[i, 2], angle_steers = set_train[i,1], color = "blue")
    draw_path_on(speed_ms = set_train[i, 2], angle_steers = predicted.angle.train[i], color = "green")
    points((set_train[i,3] + 0.5) * 320, 160 - ((set_train[i,4] + 0.5) * 320 - 160), col = "red", pch=19, cex=1.5)
}, "train.mp4", interval = 0.05)